import numpy as np
import pandas as pd
import tensorflow as tf
import matplotlib as plt
import seaborn as sns
from tensorflow.keras.models import Sequential
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.layers import Dense
from sklearn.model_selection import train_test_split
heart_data = pd.read_csv(r"heart.csv")
heart_data.sample(10)
| Age | Sex | ChestPainType | RestingBP | Cholesterol | FastingBS | RestingECG | MaxHR | ExerciseAngina | Oldpeak | ST_Slope | HeartDisease | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 604 | 68 | M | NAP | 134 | 254 | 1 | Normal | 151 | Y | 0.0 | Up | 0 |
| 374 | 61 | M | ASY | 125 | 0 | 0 | Normal | 105 | Y | 0.0 | Down | 1 |
| 11 | 58 | M | ATA | 136 | 164 | 0 | ST | 99 | Y | 2.0 | Flat | 1 |
| 799 | 53 | M | NAP | 130 | 246 | 1 | LVH | 173 | N | 0.0 | Up | 0 |
| 522 | 50 | M | ASY | 144 | 349 | 0 | LVH | 120 | Y | 1.0 | Up | 1 |
| 848 | 52 | M | ASY | 128 | 255 | 0 | Normal | 161 | Y | 0.0 | Up | 1 |
| 199 | 57 | F | TA | 130 | 308 | 0 | Normal | 98 | N | 1.0 | Flat | 0 |
| 291 | 47 | F | ATA | 140 | 257 | 0 | Normal | 135 | N | 1.0 | Up | 0 |
| 702 | 59 | M | TA | 178 | 270 | 0 | LVH | 145 | N | 4.2 | Down | 0 |
| 771 | 55 | M | ASY | 140 | 217 | 0 | Normal | 111 | Y | 5.6 | Down | 1 |
heart_data.info()
<class 'pandas.core.frame.DataFrame'> RangeIndex: 918 entries, 0 to 917 Data columns (total 12 columns): # Column Non-Null Count Dtype --- ------ -------------- ----- 0 Age 918 non-null int64 1 Sex 918 non-null object 2 ChestPainType 918 non-null object 3 RestingBP 918 non-null int64 4 Cholesterol 918 non-null int64 5 FastingBS 918 non-null int64 6 RestingECG 918 non-null object 7 MaxHR 918 non-null int64 8 ExerciseAngina 918 non-null object 9 Oldpeak 918 non-null float64 10 ST_Slope 918 non-null object 11 HeartDisease 918 non-null int64 dtypes: float64(1), int64(6), object(5) memory usage: 86.2+ KB
heart_data.isnull().sum()
Age 0 Sex 0 ChestPainType 0 RestingBP 0 Cholesterol 0 FastingBS 0 RestingECG 0 MaxHR 0 ExerciseAngina 0 Oldpeak 0 ST_Slope 0 HeartDisease 0 dtype: int64
heart_data.columns
Index(['Age', 'Sex', 'ChestPainType', 'RestingBP', 'Cholesterol', 'FastingBS',
'RestingECG', 'MaxHR', 'ExerciseAngina', 'Oldpeak', 'ST_Slope',
'HeartDisease'],
dtype='object')
heart_data.describe()
| Age | RestingBP | Cholesterol | FastingBS | MaxHR | Oldpeak | HeartDisease | |
|---|---|---|---|---|---|---|---|
| count | 918.000000 | 918.000000 | 918.000000 | 918.000000 | 918.000000 | 918.000000 | 918.000000 |
| mean | 53.510893 | 132.396514 | 198.799564 | 0.233115 | 136.809368 | 0.887364 | 0.553377 |
| std | 9.432617 | 18.514154 | 109.384145 | 0.423046 | 25.460334 | 1.066570 | 0.497414 |
| min | 28.000000 | 0.000000 | 0.000000 | 0.000000 | 60.000000 | -2.600000 | 0.000000 |
| 25% | 47.000000 | 120.000000 | 173.250000 | 0.000000 | 120.000000 | 0.000000 | 0.000000 |
| 50% | 54.000000 | 130.000000 | 223.000000 | 0.000000 | 138.000000 | 0.600000 | 1.000000 |
| 75% | 60.000000 | 140.000000 | 267.000000 | 0.000000 | 156.000000 | 1.500000 | 1.000000 |
| max | 77.000000 | 200.000000 | 603.000000 | 1.000000 | 202.000000 | 6.200000 | 1.000000 |
heart_data["Sex"] = heart_data["Sex"].replace({"F":0,'M':1})
heart_data["ChestPainType"] = heart_data["ChestPainType"].replace({"ASY":0,"ATA":1,"NAP":2,"TA":3})
heart_data["RestingBP"] = np.where(heart_data["RestingBP"] < 120, 0, 1)
heart_data["ExerciseAngina"] = heart_data["ExerciseAngina"].replace({"N":0,'Y':1})
heart_data["RestingECG"] = heart_data["RestingECG"].replace({'Normal':0,"ST":1,"LVH":2})
heart_data["ST_Slope"] = heart_data["ST_Slope"].replace({"Flat":0,'Up':1})
heart_data.head(10)
| Age | Sex | ChestPainType | RestingBP | Cholesterol | FastingBS | RestingECG | MaxHR | ExerciseAngina | Oldpeak | ST_Slope | HeartDisease | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 40 | 1 | 1 | 1 | 289 | 0 | 0 | 172 | 0 | 0.0 | 1 | 0 |
| 1 | 49 | 0 | 2 | 1 | 180 | 0 | 0 | 156 | 0 | 1.0 | 0 | 1 |
| 2 | 37 | 1 | 1 | 1 | 283 | 0 | 1 | 98 | 0 | 0.0 | 1 | 0 |
| 3 | 48 | 0 | 0 | 1 | 214 | 0 | 0 | 108 | 1 | 1.5 | 0 | 1 |
| 4 | 54 | 1 | 2 | 1 | 195 | 0 | 0 | 122 | 0 | 0.0 | 1 | 0 |
| 5 | 39 | 1 | 2 | 1 | 339 | 0 | 0 | 170 | 0 | 0.0 | 1 | 0 |
| 6 | 45 | 0 | 1 | 1 | 237 | 0 | 0 | 170 | 0 | 0.0 | 1 | 0 |
| 7 | 54 | 1 | 1 | 0 | 208 | 0 | 0 | 142 | 0 | 0.0 | 1 | 0 |
| 8 | 37 | 1 | 0 | 1 | 207 | 0 | 0 | 130 | 1 | 1.5 | 0 | 1 |
| 9 | 48 | 0 | 1 | 1 | 284 | 0 | 0 | 120 | 0 | 0.0 | 1 | 0 |
heart_data.sample(10)
| Age | Sex | ChestPainType | RestingBP | Cholesterol | FastingBS | RestingECG | MaxHR | ExerciseAngina | Oldpeak | ST_Slope | HeartDisease | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 481 | 69 | 1 | 2 | 1 | 0 | 1 | 1 | 118 | 0 | 2.5 | Down | 1 |
| 887 | 43 | 1 | 0 | 1 | 247 | 1 | 2 | 143 | 1 | 0.1 | 0 | 1 |
| 130 | 38 | 1 | 2 | 1 | 292 | 0 | 0 | 130 | 0 | 0.0 | 1 | 0 |
| 57 | 58 | 1 | 2 | 1 | 213 | 0 | 1 | 140 | 0 | 0.0 | 0 | 1 |
| 874 | 43 | 1 | 0 | 1 | 247 | 0 | 0 | 171 | 0 | 1.5 | 1 | 0 |
| 513 | 62 | 1 | 3 | 0 | 258 | 0 | 1 | 150 | 1 | 1.3 | 0 | 1 |
| 145 | 39 | 1 | 0 | 0 | 273 | 0 | 0 | 132 | 0 | 0.0 | 1 | 0 |
| 263 | 59 | 1 | 0 | 1 | 126 | 0 | 0 | 125 | 0 | 0.0 | 0 | 1 |
| 131 | 46 | 1 | 0 | 0 | 202 | 0 | 0 | 150 | 1 | 0.0 | 0 | 1 |
| 655 | 40 | 1 | 0 | 1 | 223 | 0 | 0 | 181 | 0 | 0.0 | 1 | 1 |
import matplotlib.pyplot as plt
categorical_vars = ['Sex', 'ChestPainType', 'FastingBS', 'RestingECG', 'ExerciseAngina', 'ST_Slope']
plt.figure(figsize=(12, 8))
for i, var in enumerate(categorical_vars):
plt.subplot(2, 3, i+1)
sns.countplot(data=heart_data, x=var)
plt.xlabel(var)
plt.tight_layout()
plt.show()
sns.countplot(data=heart_data, x="HeartDisease",hue="Sex")
<Axes: xlabel='HeartDisease', ylabel='count'>
heart_data_corr = heart_data[["HeartDisease","Sex","ChestPainType","RestingBP", "FastingBS", "RestingECG", "ExerciseAngina"]].corr()
heart_data_corr
plt.figure(figsize = (20,10))
sns.heatmap(heart_data_corr, cmap='crest', annot=True)
<Axes: >
import matplotlib.pyplot as plt
plt.figure(figsize=(13, 7))
sns.boxplot(data=heart_data_corr)
<Axes: >
heart_data.describe()
| Age | Sex | ChestPainType | RestingBP | Cholesterol | FastingBS | RestingECG | MaxHR | ExerciseAngina | Oldpeak | HeartDisease | |
|---|---|---|---|---|---|---|---|---|---|---|---|
| count | 918.000000 | 918.000000 | 918.000000 | 918.000000 | 918.000000 | 918.000000 | 918.000000 | 918.000000 | 918.000000 | 918.000000 | 918.000000 |
| mean | 53.510893 | 0.789760 | 0.781046 | 0.824619 | 198.799564 | 0.233115 | 0.603486 | 136.809368 | 0.404139 | 0.887364 | 0.553377 |
| std | 9.432617 | 0.407701 | 0.956519 | 0.380500 | 109.384145 | 0.423046 | 0.805968 | 25.460334 | 0.490992 | 1.066570 | 0.497414 |
| min | 28.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 60.000000 | 0.000000 | -2.600000 | 0.000000 |
| 25% | 47.000000 | 1.000000 | 0.000000 | 1.000000 | 173.250000 | 0.000000 | 0.000000 | 120.000000 | 0.000000 | 0.000000 | 0.000000 |
| 50% | 54.000000 | 1.000000 | 0.000000 | 1.000000 | 223.000000 | 0.000000 | 0.000000 | 138.000000 | 0.000000 | 0.600000 | 1.000000 |
| 75% | 60.000000 | 1.000000 | 2.000000 | 1.000000 | 267.000000 | 0.000000 | 1.000000 | 156.000000 | 1.000000 | 1.500000 | 1.000000 |
| max | 77.000000 | 1.000000 | 3.000000 | 1.000000 | 603.000000 | 1.000000 | 2.000000 | 202.000000 | 1.000000 | 6.200000 | 1.000000 |
from pandas_profiling import ProfileReport
heart_data = pd.read_csv(r"heart.csv")
report = ProfileReport(heart_data)
display(report)
C:\Users\AFC\AppData\Local\Temp\ipykernel_1460\363051215.py:1: DeprecationWarning: `import pandas_profiling` is going to be deprecated by April 1st. Please use `import ydata_profiling` instead. from pandas_profiling import ProfileReport
Summarize dataset: 0%| | 0/5 [00:00<?, ?it/s]
Generate report structure: 0%| | 0/1 [00:00<?, ?it/s]
Render HTML: 0%| | 0/1 [00:00<?, ?it/s]
x = heart_data[["Sex","ChestPainType","RestingBP", "FastingBS", "RestingECG", "ExerciseAngina"]]
y = heart_data["HeartDisease"]
X_train, X_test, Y_train, Y_test = train_test_split(x,y, test_size = 0.9, random_state = 42)
model = Sequential([Dense(64, activation = "relu"),
Dense(32, activation = "relu"),
Dense(1, activation = "sigmoid")
])
optimizer = Adam(learning_rate=0.01)
model.compile(loss = "binary_crossentropy", optimizer = optimizer, metrics = ["accuracy"])
history = model.fit(X_train, Y_train, validation_data=(X_test, Y_test), epochs=100)
Epoch 1/100 3/3 [==============================] - 1s 111ms/step - loss: 0.6864 - accuracy: 0.5165 - val_loss: 0.6179 - val_accuracy: 0.7400 Epoch 2/100 3/3 [==============================] - 0s 32ms/step - loss: 0.6144 - accuracy: 0.7363 - val_loss: 0.5779 - val_accuracy: 0.7703 Epoch 3/100 3/3 [==============================] - 0s 33ms/step - loss: 0.5701 - accuracy: 0.7253 - val_loss: 0.5241 - val_accuracy: 0.7763 Epoch 4/100 3/3 [==============================] - 0s 34ms/step - loss: 0.5360 - accuracy: 0.7473 - val_loss: 0.5342 - val_accuracy: 0.7775 Epoch 5/100 3/3 [==============================] - 0s 30ms/step - loss: 0.5152 - accuracy: 0.7582 - val_loss: 0.5395 - val_accuracy: 0.7352 Epoch 6/100 3/3 [==============================] - 0s 31ms/step - loss: 0.4850 - accuracy: 0.7582 - val_loss: 0.5277 - val_accuracy: 0.7823 Epoch 7/100 3/3 [==============================] - 0s 35ms/step - loss: 0.4806 - accuracy: 0.7582 - val_loss: 0.5685 - val_accuracy: 0.7485 Epoch 8/100 3/3 [==============================] - 0s 28ms/step - loss: 0.4584 - accuracy: 0.7912 - val_loss: 0.5674 - val_accuracy: 0.7437 Epoch 9/100 3/3 [==============================] - 0s 33ms/step - loss: 0.4500 - accuracy: 0.7912 - val_loss: 0.5846 - val_accuracy: 0.7400 Epoch 10/100 3/3 [==============================] - 0s 35ms/step - loss: 0.4442 - accuracy: 0.8022 - val_loss: 0.6168 - val_accuracy: 0.7437 Epoch 11/100 3/3 [==============================] - 0s 34ms/step - loss: 0.4353 - accuracy: 0.8022 - val_loss: 0.6072 - val_accuracy: 0.7388 Epoch 12/100 3/3 [==============================] - 0s 30ms/step - loss: 0.4262 - accuracy: 0.8022 - val_loss: 0.6172 - val_accuracy: 0.7437 Epoch 13/100 3/3 [==============================] - 0s 40ms/step - loss: 0.4250 - accuracy: 0.8022 - val_loss: 0.6298 - val_accuracy: 0.7437 Epoch 14/100 3/3 [==============================] - 0s 40ms/step - loss: 0.4128 - accuracy: 0.8132 - val_loss: 0.6422 - val_accuracy: 0.7461 Epoch 15/100 3/3 [==============================] - 0s 40ms/step - loss: 0.4014 - accuracy: 0.8132 - val_loss: 0.6283 - val_accuracy: 0.7461 Epoch 16/100 3/3 [==============================] - 0s 39ms/step - loss: 0.3954 - accuracy: 0.8132 - val_loss: 0.6252 - val_accuracy: 0.7437 Epoch 17/100 3/3 [==============================] - 0s 47ms/step - loss: 0.3920 - accuracy: 0.8242 - val_loss: 0.6584 - val_accuracy: 0.7437 Epoch 18/100 3/3 [==============================] - 0s 36ms/step - loss: 0.3815 - accuracy: 0.8242 - val_loss: 0.6702 - val_accuracy: 0.7412 Epoch 19/100 3/3 [==============================] - 0s 27ms/step - loss: 0.3702 - accuracy: 0.8242 - val_loss: 0.6729 - val_accuracy: 0.7388 Epoch 20/100 3/3 [==============================] - 0s 31ms/step - loss: 0.3630 - accuracy: 0.8242 - val_loss: 0.6910 - val_accuracy: 0.7400 Epoch 21/100 3/3 [==============================] - 0s 31ms/step - loss: 0.3587 - accuracy: 0.8462 - val_loss: 0.7044 - val_accuracy: 0.7267 Epoch 22/100 3/3 [==============================] - 0s 30ms/step - loss: 0.3452 - accuracy: 0.8462 - val_loss: 0.6972 - val_accuracy: 0.7279 Epoch 23/100 3/3 [==============================] - 0s 33ms/step - loss: 0.3385 - accuracy: 0.8462 - val_loss: 0.7159 - val_accuracy: 0.7255 Epoch 24/100 3/3 [==============================] - 0s 32ms/step - loss: 0.3418 - accuracy: 0.8462 - val_loss: 0.7616 - val_accuracy: 0.7267 Epoch 25/100 3/3 [==============================] - 0s 31ms/step - loss: 0.3285 - accuracy: 0.8462 - val_loss: 0.7465 - val_accuracy: 0.7304 Epoch 26/100 3/3 [==============================] - 0s 31ms/step - loss: 0.3175 - accuracy: 0.8571 - val_loss: 0.7321 - val_accuracy: 0.7654 Epoch 27/100 3/3 [==============================] - 0s 30ms/step - loss: 0.3253 - accuracy: 0.8462 - val_loss: 0.7399 - val_accuracy: 0.7557 Epoch 28/100 3/3 [==============================] - 0s 30ms/step - loss: 0.3094 - accuracy: 0.8571 - val_loss: 0.8569 - val_accuracy: 0.6989 Epoch 29/100 3/3 [==============================] - 0s 33ms/step - loss: 0.3239 - accuracy: 0.8242 - val_loss: 0.8150 - val_accuracy: 0.7255 Epoch 30/100 3/3 [==============================] - 0s 29ms/step - loss: 0.2823 - accuracy: 0.8791 - val_loss: 0.7818 - val_accuracy: 0.7170 Epoch 31/100 3/3 [==============================] - 0s 31ms/step - loss: 0.3149 - accuracy: 0.8681 - val_loss: 0.8156 - val_accuracy: 0.7570 Epoch 32/100 3/3 [==============================] - 0s 32ms/step - loss: 0.3180 - accuracy: 0.8571 - val_loss: 0.9841 - val_accuracy: 0.6856 Epoch 33/100 3/3 [==============================] - 0s 32ms/step - loss: 0.2955 - accuracy: 0.8791 - val_loss: 0.8464 - val_accuracy: 0.7267 Epoch 34/100 3/3 [==============================] - 0s 31ms/step - loss: 0.2828 - accuracy: 0.8791 - val_loss: 0.8204 - val_accuracy: 0.7207 Epoch 35/100 3/3 [==============================] - 0s 31ms/step - loss: 0.2759 - accuracy: 0.8791 - val_loss: 0.9081 - val_accuracy: 0.7557 Epoch 36/100 3/3 [==============================] - 0s 31ms/step - loss: 0.2811 - accuracy: 0.8462 - val_loss: 0.9614 - val_accuracy: 0.6917 Epoch 37/100 3/3 [==============================] - 0s 30ms/step - loss: 0.2753 - accuracy: 0.8571 - val_loss: 0.9268 - val_accuracy: 0.7243 Epoch 38/100 3/3 [==============================] - 0s 35ms/step - loss: 0.2655 - accuracy: 0.8681 - val_loss: 0.9432 - val_accuracy: 0.6904 Epoch 39/100 3/3 [==============================] - 0s 37ms/step - loss: 0.2615 - accuracy: 0.8681 - val_loss: 0.9607 - val_accuracy: 0.6880 Epoch 40/100 3/3 [==============================] - 0s 34ms/step - loss: 0.2580 - accuracy: 0.8681 - val_loss: 0.9876 - val_accuracy: 0.6699 Epoch 41/100 3/3 [==============================] - 0s 30ms/step - loss: 0.2573 - accuracy: 0.8681 - val_loss: 0.9870 - val_accuracy: 0.6651 Epoch 42/100 3/3 [==============================] - 0s 32ms/step - loss: 0.2562 - accuracy: 0.8681 - val_loss: 1.0309 - val_accuracy: 0.7219 Epoch 43/100 3/3 [==============================] - 0s 32ms/step - loss: 0.2536 - accuracy: 0.8571 - val_loss: 1.0502 - val_accuracy: 0.6711 Epoch 44/100 3/3 [==============================] - 0s 32ms/step - loss: 0.2489 - accuracy: 0.8571 - val_loss: 1.0319 - val_accuracy: 0.7582 Epoch 45/100 3/3 [==============================] - 0s 31ms/step - loss: 0.2464 - accuracy: 0.8791 - val_loss: 1.0343 - val_accuracy: 0.7582 Epoch 46/100 3/3 [==============================] - 0s 31ms/step - loss: 0.2524 - accuracy: 0.8681 - val_loss: 1.0891 - val_accuracy: 0.6832 Epoch 47/100 3/3 [==============================] - 0s 31ms/step - loss: 0.2434 - accuracy: 0.8791 - val_loss: 1.0878 - val_accuracy: 0.6977 Epoch 48/100 3/3 [==============================] - 0s 30ms/step - loss: 0.2445 - accuracy: 0.8681 - val_loss: 1.0765 - val_accuracy: 0.7013 Epoch 49/100 3/3 [==============================] - 0s 33ms/step - loss: 0.2439 - accuracy: 0.8791 - val_loss: 1.1374 - val_accuracy: 0.7146 Epoch 50/100 3/3 [==============================] - 0s 30ms/step - loss: 0.2438 - accuracy: 0.8681 - val_loss: 1.1641 - val_accuracy: 0.7146 Epoch 51/100 3/3 [==============================] - 0s 32ms/step - loss: 0.2665 - accuracy: 0.8462 - val_loss: 1.2124 - val_accuracy: 0.6239 Epoch 52/100 3/3 [==============================] - 0s 30ms/step - loss: 0.2300 - accuracy: 0.9011 - val_loss: 1.1176 - val_accuracy: 0.6929 Epoch 53/100 3/3 [==============================] - 0s 30ms/step - loss: 0.2661 - accuracy: 0.8791 - val_loss: 1.0890 - val_accuracy: 0.7243 Epoch 54/100 3/3 [==============================] - 0s 31ms/step - loss: 0.2432 - accuracy: 0.8791 - val_loss: 1.1934 - val_accuracy: 0.7158 Epoch 55/100 3/3 [==============================] - 0s 29ms/step - loss: 0.2575 - accuracy: 0.8681 - val_loss: 1.2008 - val_accuracy: 0.7146 Epoch 56/100 3/3 [==============================] - 0s 29ms/step - loss: 0.2400 - accuracy: 0.8791 - val_loss: 1.1448 - val_accuracy: 0.6904 Epoch 57/100 3/3 [==============================] - 0s 31ms/step - loss: 0.2438 - accuracy: 0.8791 - val_loss: 1.1519 - val_accuracy: 0.6941 Epoch 58/100 3/3 [==============================] - 0s 29ms/step - loss: 0.2411 - accuracy: 0.8791 - val_loss: 1.1633 - val_accuracy: 0.6820 Epoch 59/100 3/3 [==============================] - 0s 30ms/step - loss: 0.2564 - accuracy: 0.8571 - val_loss: 1.2091 - val_accuracy: 0.6832 Epoch 60/100 3/3 [==============================] - 0s 29ms/step - loss: 0.2469 - accuracy: 0.8791 - val_loss: 1.1638 - val_accuracy: 0.7243 Epoch 61/100 3/3 [==============================] - 0s 27ms/step - loss: 0.2327 - accuracy: 0.8791 - val_loss: 1.1629 - val_accuracy: 0.6880 Epoch 62/100 3/3 [==============================] - 0s 29ms/step - loss: 0.2545 - accuracy: 0.8791 - val_loss: 1.1968 - val_accuracy: 0.6892 Epoch 63/100 3/3 [==============================] - 0s 28ms/step - loss: 0.2380 - accuracy: 0.8791 - val_loss: 1.2883 - val_accuracy: 0.6191 Epoch 64/100 3/3 [==============================] - 0s 28ms/step - loss: 0.2502 - accuracy: 0.8571 - val_loss: 1.3090 - val_accuracy: 0.7158 Epoch 65/100 3/3 [==============================] - 0s 27ms/step - loss: 0.2445 - accuracy: 0.8681 - val_loss: 1.2319 - val_accuracy: 0.6904 Epoch 66/100 3/3 [==============================] - 0s 29ms/step - loss: 0.2370 - accuracy: 0.8791 - val_loss: 1.2348 - val_accuracy: 0.6965 Epoch 67/100 3/3 [==============================] - 0s 29ms/step - loss: 0.2420 - accuracy: 0.8571 - val_loss: 1.2604 - val_accuracy: 0.7243 Epoch 68/100 3/3 [==============================] - 0s 29ms/step - loss: 0.2358 - accuracy: 0.8681 - val_loss: 1.2566 - val_accuracy: 0.6868 Epoch 69/100 3/3 [==============================] - 0s 30ms/step - loss: 0.2322 - accuracy: 0.8681 - val_loss: 1.2711 - val_accuracy: 0.6796 Epoch 70/100 3/3 [==============================] - 0s 29ms/step - loss: 0.2395 - accuracy: 0.8681 - val_loss: 1.2848 - val_accuracy: 0.6771 Epoch 71/100 3/3 [==============================] - 0s 29ms/step - loss: 0.2339 - accuracy: 0.8901 - val_loss: 1.2540 - val_accuracy: 0.6965 Epoch 72/100 3/3 [==============================] - 0s 28ms/step - loss: 0.2379 - accuracy: 0.8791 - val_loss: 1.2360 - val_accuracy: 0.6941 Epoch 73/100 3/3 [==============================] - 0s 29ms/step - loss: 0.2358 - accuracy: 0.8791 - val_loss: 1.3021 - val_accuracy: 0.7207 Epoch 74/100 3/3 [==============================] - 0s 28ms/step - loss: 0.2301 - accuracy: 0.8791 - val_loss: 1.3573 - val_accuracy: 0.6856 Epoch 75/100 3/3 [==============================] - 0s 29ms/step - loss: 0.2408 - accuracy: 0.8352 - val_loss: 1.3279 - val_accuracy: 0.6614 Epoch 76/100 3/3 [==============================] - 0s 28ms/step - loss: 0.2308 - accuracy: 0.8681 - val_loss: 1.2865 - val_accuracy: 0.7025 Epoch 77/100 3/3 [==============================] - 0s 29ms/step - loss: 0.2380 - accuracy: 0.8571 - val_loss: 1.2855 - val_accuracy: 0.7545 Epoch 78/100 3/3 [==============================] - 0s 29ms/step - loss: 0.2338 - accuracy: 0.8791 - val_loss: 1.3088 - val_accuracy: 0.7473 Epoch 79/100 3/3 [==============================] - 0s 29ms/step - loss: 0.2355 - accuracy: 0.8681 - val_loss: 1.3058 - val_accuracy: 0.7473 Epoch 80/100 3/3 [==============================] - 0s 30ms/step - loss: 0.2356 - accuracy: 0.8681 - val_loss: 1.3164 - val_accuracy: 0.6868 Epoch 81/100 3/3 [==============================] - 0s 29ms/step - loss: 0.2323 - accuracy: 0.8681 - val_loss: 1.3451 - val_accuracy: 0.6614 Epoch 82/100 3/3 [==============================] - 0s 30ms/step - loss: 0.2329 - accuracy: 0.8791 - val_loss: 1.3681 - val_accuracy: 0.6941 Epoch 83/100 3/3 [==============================] - 0s 27ms/step - loss: 0.2328 - accuracy: 0.8791 - val_loss: 1.3516 - val_accuracy: 0.7449 Epoch 84/100 3/3 [==============================] - 0s 29ms/step - loss: 0.2444 - accuracy: 0.8681 - val_loss: 1.3484 - val_accuracy: 0.7557 Epoch 85/100 3/3 [==============================] - 0s 30ms/step - loss: 0.2420 - accuracy: 0.8352 - val_loss: 1.4151 - val_accuracy: 0.6324 Epoch 86/100 3/3 [==============================] - 0s 29ms/step - loss: 0.2394 - accuracy: 0.8681 - val_loss: 1.3909 - val_accuracy: 0.6578 Epoch 87/100 3/3 [==============================] - 0s 29ms/step - loss: 0.2385 - accuracy: 0.8571 - val_loss: 1.3621 - val_accuracy: 0.7025 Epoch 88/100 3/3 [==============================] - 0s 30ms/step - loss: 0.2370 - accuracy: 0.8791 - val_loss: 1.3289 - val_accuracy: 0.7545 Epoch 89/100 3/3 [==============================] - 0s 30ms/step - loss: 0.2400 - accuracy: 0.8791 - val_loss: 1.3342 - val_accuracy: 0.7545 Epoch 90/100 3/3 [==============================] - 0s 27ms/step - loss: 0.2345 - accuracy: 0.8901 - val_loss: 1.3759 - val_accuracy: 0.7219 Epoch 91/100 3/3 [==============================] - 0s 29ms/step - loss: 0.2283 - accuracy: 0.8791 - val_loss: 1.3905 - val_accuracy: 0.6687 Epoch 92/100 3/3 [==============================] - 0s 31ms/step - loss: 0.2716 - accuracy: 0.8681 - val_loss: 1.3871 - val_accuracy: 0.6626 Epoch 93/100 3/3 [==============================] - 0s 29ms/step - loss: 0.2438 - accuracy: 0.8571 - val_loss: 1.4633 - val_accuracy: 0.6771 Epoch 94/100 3/3 [==============================] - 0s 30ms/step - loss: 0.2430 - accuracy: 0.8681 - val_loss: 1.4174 - val_accuracy: 0.7134 Epoch 95/100 3/3 [==============================] - 0s 31ms/step - loss: 0.2454 - accuracy: 0.8571 - val_loss: 1.3184 - val_accuracy: 0.7062 Epoch 96/100 3/3 [==============================] - 0s 30ms/step - loss: 0.2354 - accuracy: 0.8791 - val_loss: 1.3106 - val_accuracy: 0.7025 Epoch 97/100 3/3 [==============================] - 0s 28ms/step - loss: 0.2347 - accuracy: 0.8791 - val_loss: 1.3137 - val_accuracy: 0.7545 Epoch 98/100 3/3 [==============================] - 0s 28ms/step - loss: 0.2316 - accuracy: 0.8791 - val_loss: 1.3610 - val_accuracy: 0.6784 Epoch 99/100 3/3 [==============================] - 0s 29ms/step - loss: 0.2362 - accuracy: 0.8681 - val_loss: 1.3576 - val_accuracy: 0.6252 Epoch 100/100 3/3 [==============================] - 0s 28ms/step - loss: 0.2316 - accuracy: 0.8681 - val_loss: 1.3308 - val_accuracy: 0.6953
loss, accuracy = model.evaluate(X_test, Y_test)
print("Test Loss: ",loss)
print("Test Accuracy: ",accuracy)
26/26 [==============================] - 0s 1ms/step - loss: 1.3308 - accuracy: 0.6953 Test Loss: 1.3308433294296265 Test Accuracy: 0.6952841877937317
predictions = model.predict(X_test)
26/26 [==============================] - 0s 1ms/step
loss, accuracy = model.evaluate(X_train, Y_train)
print("Test Loss: ",loss)
print("Test Accuracy: ",accuracy)
3/3 [==============================] - 0s 3ms/step - loss: 0.2283 - accuracy: 0.8791 Test Loss: 0.22826984524726868 Test Accuracy: 0.8791208863258362
X_train, X_test, Y_train, Y_test = train_test_split(x, y, test_size = 0.9 , random_state = 42)
from sklearn.preprocessing import StandardScaler
scalar = StandardScaler()
scalar.fit(X_train)
StandardScaler()In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook.
StandardScaler()
heart_data.describe()
| Age | Sex | ChestPainType | RestingBP | Cholesterol | FastingBS | RestingECG | MaxHR | ExerciseAngina | Oldpeak | HeartDisease | |
|---|---|---|---|---|---|---|---|---|---|---|---|
| count | 918.000000 | 918.000000 | 918.000000 | 918.000000 | 918.000000 | 918.000000 | 918.000000 | 918.000000 | 918.000000 | 918.000000 | 918.000000 |
| mean | 53.510893 | 0.789760 | 0.781046 | 0.824619 | 198.799564 | 0.233115 | 0.603486 | 136.809368 | 0.404139 | 0.887364 | 0.553377 |
| std | 9.432617 | 0.407701 | 0.956519 | 0.380500 | 109.384145 | 0.423046 | 0.805968 | 25.460334 | 0.490992 | 1.066570 | 0.497414 |
| min | 28.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 60.000000 | 0.000000 | -2.600000 | 0.000000 |
| 25% | 47.000000 | 1.000000 | 0.000000 | 1.000000 | 173.250000 | 0.000000 | 0.000000 | 120.000000 | 0.000000 | 0.000000 | 0.000000 |
| 50% | 54.000000 | 1.000000 | 0.000000 | 1.000000 | 223.000000 | 0.000000 | 0.000000 | 138.000000 | 0.000000 | 0.600000 | 1.000000 |
| 75% | 60.000000 | 1.000000 | 2.000000 | 1.000000 | 267.000000 | 0.000000 | 1.000000 | 156.000000 | 1.000000 | 1.500000 | 1.000000 |
| max | 77.000000 | 1.000000 | 3.000000 | 1.000000 | 603.000000 | 1.000000 | 2.000000 | 202.000000 | 1.000000 | 6.200000 | 1.000000 |
X_train_scaled =scalar.transform(X_train)
X_test_scaled = scalar.transform(X_test)
from sklearn.linear_model import LogisticRegression
log_reg = LogisticRegression()
log_reg.fit(X_train_scaled, Y_train)
LogisticRegression()In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook.
LogisticRegression()
log_reg_pred=log_reg.predict(X_test)
from sklearn.metrics import accuracy_score
log_reg_acc = accuracy_score(Y_test, log_reg_pred)
log_reg_acc
C:\Users\AFC\AppData\Local\Programs\Python\Python311\Lib\site-packages\sklearn\base.py:432: UserWarning: X has feature names, but LogisticRegression was fitted without feature names warnings.warn(
0.7762998790810157
from sklearn.tree import DecisionTreeClassifier
dec_tree = DecisionTreeClassifier()
dec_tree.fit(X_train,Y_train)
DecisionTreeClassifier()In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook.
DecisionTreeClassifier()
dec_tree_pre = dec_tree.predict(X_test)
dec_tree_acc= accuracy_score(Y_test, dec_tree_pre)
dec_tree_acc
0.6807738814993954
from sklearn.ensemble import RandomForestClassifier
ran_cla = RandomForestClassifier()
ran_cla.fit(X_train,Y_train)
RandomForestClassifier()In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook.
RandomForestClassifier()
ran_cla_pre = ran_cla.predict(X_test)
ran_cla_acc = accuracy_score(Y_test, ran_cla_pre)
ran_cla_acc
0.7496977025392987
from xgboost import XGBClassifier
xgb = XGBClassifier()
xgb.fit(X_train,Y_train)
XGBClassifier(base_score=None, booster=None, callbacks=None,
colsample_bylevel=None, colsample_bynode=None,
colsample_bytree=None, early_stopping_rounds=None,
enable_categorical=False, eval_metric=None, feature_types=None,
gamma=None, gpu_id=None, grow_policy=None, importance_type=None,
interaction_constraints=None, learning_rate=None, max_bin=None,
max_cat_threshold=None, max_cat_to_onehot=None,
max_delta_step=None, max_depth=None, max_leaves=None,
min_child_weight=None, missing=nan, monotone_constraints=None,
n_estimators=100, n_jobs=None, num_parallel_tree=None,
predictor=None, random_state=None, ...)In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook. XGBClassifier(base_score=None, booster=None, callbacks=None,
colsample_bylevel=None, colsample_bynode=None,
colsample_bytree=None, early_stopping_rounds=None,
enable_categorical=False, eval_metric=None, feature_types=None,
gamma=None, gpu_id=None, grow_policy=None, importance_type=None,
interaction_constraints=None, learning_rate=None, max_bin=None,
max_cat_threshold=None, max_cat_to_onehot=None,
max_delta_step=None, max_depth=None, max_leaves=None,
min_child_weight=None, missing=nan, monotone_constraints=None,
n_estimators=100, n_jobs=None, num_parallel_tree=None,
predictor=None, random_state=None, ...)xgb_pre = xgb.predict(X_test)
xgb_acc = accuracy_score(Y_test, xgb_pre)
xgb_acc
0.7230955259975816
def print_accuracy_scores(accuracy, log_reg_acc, dec_tree_acc, ran_cla_acc, xgb_acc):
print("Neural Network Accuracy: {:.2f}%".format(accuracy * 100))
print("Logistic Regression Accuracy: {:.2f}%".format(log_reg_acc * 100))
print("Decision Tree Classifier Accuracy: {:.2f}%".format(dec_tree_acc * 100))
print("Random Forest Accuracy: {:.2f}%".format(ran_cla_acc * 100))
print("XGBoost Accuracy: {:.2f}%".format(xgb_acc * 100))
print_accuracy_scores(accuracy, log_reg_acc, dec_tree_acc, ran_cla_acc, xgb_acc)
Neural Network Accuracy: 87.91% Logistic Regression Accuracy: 77.63% Decision Tree Classifier Accuracy: 68.08% Random Forest Accuracy: 74.97% XGBoost Accuracy: 72.31%
Neural Networks Provided the Best Accuracy Score